library(lmerTest)
Loading required package: lme4
Loading required package: Matrix

Attaching package: ‘lmerTest’

The following object is masked from ‘package:lme4’:

    lmer

The following object is masked from ‘package:stats’:

    step
library(ggeffects)
library(dplyr)
package ‘dplyr’ was built under R version 4.0.5
Attaching package: ‘dplyr’

The following objects are masked from ‘package:stats’:

    filter, lag

The following objects are masked from ‘package:base’:

    intersect, setdiff, setequal, union
library(report)
package ‘report’ was built under R version 4.0.5
library(r2glmm)
library(performance)
package ‘performance’ was built under R version 4.0.5
# Fitted values from model `m` on the link (log-odds) scale.
# NOTE(review): `m` is fitted earlier in the session, outside this excerpt;
# the numbers below are the pasted console output, truncated by max.print.
predict(m)
         1          2          3          4          5          6          7          8          9         10 
 0.5596158  1.5040774 -0.9808293  0.8472979 -0.5596158 -0.5596158 14.5660678  0.1823216  0.1823216  0.8472979 
        11         12         13         14         15         16         17         18         19         20 
-0.5596158 -0.5596158  0.1823216 -0.5596158  0.5596158 -0.8472979  0.1823216  0.1823216 -0.5596158 -0.1823216 
        21         22         23         24         25         26         27         28         29         30 
-0.1823216  1.5040774  1.3862944  0.9808293 -0.1823216  0.5596158 -0.1823216 -0.5596158  0.4054651 -0.9808293 
        31         32         33         34         35         36         37         38         39         40 
 0.5596158  0.5596158 -0.1823216  0.5596158  0.1823216  0.9808293  1.5040774 -0.5596158  0.4054651  0.5596158 
        41         42         43         44         45         46         47         48         49         50 
-1.5040774 -0.1823216  0.9808293  1.5040774 -0.4054651  0.1823216  0.5596158 -0.9808293 -0.5596158 -0.5596158 
        51         52         53         54         55         56         57         58         59         60 
 0.1823216  0.1823216  0.1823216  0.9808293 -0.5596158 -0.1823216 -0.1823216  0.4054651  0.5596158  0.5596158 
        61         62         63         64         65         66         67         68         69         70 
 0.5596158  0.5596158  0.1823216 -0.1823216  0.5596158  0.9808293  0.5596158  0.1823216 -0.1823216 -0.1823216 
        71         72         73         74         75         76         77         78         79         80 
-0.1823216  0.5596158  0.5596158  0.5596158 -0.1823216  0.1823216 -0.1823216 -0.5596158  2.3025851  0.9808293 
        81         82         83         84         85         86         87         88         89         90 
 0.4054651 -0.5596158 -0.4054651  1.3862944  1.5040774 -0.8472979  0.9808293  0.1823216 -0.1823216 -0.1823216 
        91         92         93         94         95         96         97         98         99        100 
 0.5596158  0.9808293 -0.9808293  0.4054651  0.1823216 -0.5596158 -0.9808293  0.4054651  0.1823216 -0.5596158 
       101        102        103        104        105        106        107        108        109        110 
 0.1823216 -0.1823216 -0.5596158  0.4054651 -0.9808293  0.1823216  1.5040774  0.1823216  2.3025851 -0.1823216 
       111        112        113        114        115        116        117        118        119        120 
 0.9808293  1.5040774  0.1823216 -0.1823216 -0.1823216 -0.1823216  0.9808293 -0.1823216  1.5040774  0.1823216 
       121        122        123        124        125        126        127        128        129        130 
 0.1823216 -2.1972246  0.1823216  0.5596158 -1.3862944  0.9808293  1.3862944 -0.8472979 -0.5596158  1.3862944 
       131        132        133        134        135        136        137        138        139        140 
-0.5596158  1.5040774  0.6931472  0.5596158  0.5596158  0.9808293  0.1823216  0.9808293  0.1823216 -0.1823216 
       141        142        143        144        145        146        147        148        149        150 
 0.1823216  0.4054651  0.9808293 -1.3862944  0.1823216 -0.5596158 -0.5596158  0.5596158 -0.1823216 -0.5596158 
       151        152        153        154        155        156        157        158        159        160 
-0.1823216  0.5596158 -0.5596158  0.1823216 -0.1823216  0.1823216 -0.5596158  0.5596158 -0.5596158  0.9808293 
       162        163        165        166        167        168        169        170        171        174 
-0.9808293  2.3025851  0.9808293 -0.5596158  0.1823216 14.5660678  0.9808293  0.5596158 -0.1823216 -0.1823216 
       175        176        177        178        179        180        181        182        183        184 
 0.1823216  0.5596158 -0.1823216  0.1823216  0.1823216 -0.5596158 -0.8472979  0.1823216  0.5596158  0.5596158 
       185        186        187        189        190        191        192        193        194        195 
 0.5596158  2.3025851  0.9808293 -0.9808293 -0.1823216 -0.5596158  1.5040774  0.5596158 -0.1823216  0.1823216 
       196        197        198        199        200        201        202        203        204        205 
 0.1823216 -0.4054651  0.5596158  1.5040774  0.5596158 -0.5596158 -0.5596158  0.5596158 -0.9808293  0.9808293 
       206        207        208        209        210        211        212        213        214        215 
-0.1823216 -1.5040774  0.9808293  0.5596158 -0.5596158  0.5596158  1.5040774  1.3862944 -0.5596158 -0.1823216 
       217        218        219        220        221        222        223        224        225        226 
 0.1823216 -0.1823216  0.1823216  0.5596158  1.5040774  0.1823216 -0.1823216  0.1823216 -0.5596158 -0.1823216 
       227        228        232        233        234        235        236        237        238        239 
 0.1823216 -0.5596158 -0.1823216  1.5040774  0.5596158  0.9808293  0.5596158  0.5596158  0.9808293 -0.1823216 
       240        241        242        243        244        245        246        247        248        249 
 0.1823216 -0.1823216  0.9808293  0.9808293  0.1823216  0.5596158  0.1823216  0.9808293 -0.1823216  1.5040774 
       250        251        252        253        254        255        256        257        258        259 
-0.1823216  0.5596158  0.1823216 -0.5596158  0.1823216  1.5040774  0.1823216  0.5596158 -0.1823216  0.5596158 
       260        261        262        263        264        269        270        271        272        273 
 0.1823216  0.1823216  1.5040774 -0.1823216  0.9808293  0.1823216 -0.5596158  0.1823216  0.9808293 -0.5596158 
       274        275        276        278        279        280        281        282        284        285 
-0.1823216  0.9808293  0.5596158 -0.5596158 -0.5596158  0.1823216  0.1823216 -0.9808293  0.1823216 -0.5596158 
       286        287        289        290        293        294        297        298        299        300 
-0.5596158 -0.9808293 -0.1823216 -0.1823216  1.5040774 -0.9808293  0.4054651  0.1823216  0.5596158  0.5596158 
       301        302        303        304        305        306        307        308        309        310 
 1.5040774  0.1823216 -0.1823216  0.1823216 -0.1823216 -0.4054651  0.9808293 -0.5596158  0.5596158  1.5040774 
       311        312        313        314        315        316        317        318        319        320 
 0.5596158  1.3862944  0.1823216 -0.5596158 -0.1823216 -0.5596158 -0.1823216 -0.1823216  0.5596158 -0.1823216 
       321        322        323        324        325        326        327        328        329        330 
 0.1823216  0.5596158  0.5596158  0.4054651 14.5660678  0.9808293  0.1823216 -0.5596158  0.5596158 -0.1823216 
       331        332        333        334        335        336        337        338        339        340 
-1.5040774  0.1823216 -0.5596158  0.8472979 -0.9808293  0.4054651  2.3025851  0.1823216  0.1823216 -0.9808293 
       341        342        343        344        345        346        347        348        349        350 
-0.8472979  0.1823216 -0.1823216  0.5596158  0.5596158  1.5040774 -0.5596158  0.1823216 -0.1823216  0.9808293 
       351        352        353        354        355        356        357        358        359        360 
-0.8472979  1.5040774 -0.1823216  0.5596158  1.3862944 -0.1823216 -0.1823216 -2.1972246  0.1823216 -0.1823216 
       361        362        363        364        365        366        367        368        369        370 
-0.1823216  0.9808293  0.9808293 -0.5596158 -0.5596158  0.4054651  0.1823216  0.1823216 -0.1823216  0.9808293 
       371        372        373        374        375        376        377        378        379        380 
 0.5596158  0.5596158  0.5596158 -0.9808293  0.1823216  0.9808293 -0.5596158  0.1823216  0.1823216 -0.5596158 
       381        382        383        384        385        386        387        388        389        390 
 0.9808293 -0.9808293 -0.5596158  0.1823216  0.1823216  0.8472979 -0.5596158 -0.5596158  0.9808293 -0.1823216 
       391        392        393        394        395        396        397        398        399        400 
-0.4054651  1.5040774 -0.5596158 -0.5596158  0.4054651  0.5596158  0.1823216  2.3025851  0.4054651  0.5596158 
       401        402        403        404        405        406        407        408        409        410 
 0.1823216 -0.9808293 -0.5596158  0.9808293 -0.8472979  0.1823216  0.6931472  0.9808293  0.1823216 -0.5596158 
       411        412        413        414        415        416        417        418        419        420 
-0.5596158  1.5040774  1.3862944  1.5040774  0.5596158  0.9808293 -0.5596158  0.1823216 -0.1823216  1.3862944 
       421        422        423        424        425        426        427        428        429        430 
 0.9808293  1.5040774 -0.1823216  0.5596158 -0.1823216  0.4054651 -0.1823216 -0.5596158 -1.3862944 -0.9808293 
       431        432        433        434        435        436        437        438        439        440 
-0.1823216 -1.3862944  0.9808293  0.5596158  0.4054651  0.1823216  0.5596158  0.5596158  0.5596158  1.5040774 
       441        442        443        444        445        446        447        448        449        450 
 0.1823216 -0.1823216  0.1823216 -0.1823216  0.4054651  0.9808293 -0.9808293 -0.5596158 -0.1823216 -0.1823216 
       451        452        453        454        455        456        457        458        459        460 
 0.5596158 -2.1972246  0.5596158  0.1823216  0.9808293  0.1823216  0.9808293 -0.1823216  0.4054651  0.4054651 
       461        462        463        464        465        466        467        468        469        470 
 0.5596158 -0.5596158 -0.1823216  0.1823216 -0.9808293 -0.9808293  1.3862944 -0.5596158 -0.1823216  0.1823216 
       471        472        473        474        475        476        477        478        479        480 
 2.3025851  0.1823216  0.1823216 -0.5596158 -0.1823216  0.5596158  0.9808293  0.1823216 -0.5596158  0.6931472 
       481        482        483        484        485        486        487        488        489        490 
-0.9808293  0.5596158  0.1823216  1.5040774  0.1823216  0.5596158  0.5596158  0.9808293  1.3862944 -0.1823216 
       491        492        493        494        495        496        497        498        499        500 
 0.1823216  0.5596158 -0.5596158  1.5040774  0.1823216  0.1823216  0.1823216 14.5660678  0.5596158 -0.5596158 
       501        502        503        504        505        506        507        508        509        510 
 0.5596158  0.9808293  0.9808293 -0.4054651 -0.9808293 -0.1823216 -0.1823216 -0.5596158 -1.3862944  0.1823216 
       511        512        513        514        515        516        517        518        519        520 
-0.5596158  0.1823216 -0.1823216 -0.5596158  0.4054651  0.8472979  0.5596158 -0.5596158  0.5596158  0.9808293 
       521        522        523        524        525        526        527        528        529        530 
 0.5596158 -0.5596158  0.1823216 -0.1823216  1.5040774 -0.1823216  0.4054651  0.5596158 -1.5040774 -0.5596158 
       531        532        533        534        535        536        537        538        539        540 
 1.3862944  0.9808293  0.1823216  1.5040774 -0.1823216 -0.5596158  0.1823216 -0.1823216  0.5596158 -0.5596158 
       541        542        543        544        545        546        547        548        549        550 
-0.5596158 -0.5596158  0.1823216  0.5596158  1.5040774  2.3025851  1.5040774  0.5596158 -0.4054651  0.1823216 
       551        552        553        554        555        556        557        558        559        560 
-0.1823216 -0.1823216  1.3862944  0.5596158  0.4054651 -0.1823216  0.5596158 -0.1823216  0.1823216 -0.8472979 
       561        562        563        564        565        566        567        568        569        570 
-0.5596158 -0.1823216 -0.1823216 -0.1823216 -0.1823216  0.1823216  0.9808293  1.5040774 -0.5596158  0.4054651 
       571        572        573        574        575        576        577        578        579        580 
 0.5596158  0.1823216  0.9808293  0.5596158  1.5040774  0.1823216  0.1823216 -0.5596158  0.9808293  0.4054651 
       581        582        583        584        585        586        587        588        589        590 
 0.9808293 -1.3862944  0.8472979 -0.1823216  1.5040774  0.1823216 -0.8472979  0.9808293  0.5596158  0.1823216 
       592        593        594        595        596        597        598        599        600        601 
-0.9808293  0.1823216  1.3862944  0.9808293  0.9808293  1.5040774 -0.1823216  0.5596158  0.5596158  0.5596158 
       602        603        604        605        606        607        608        609        610        611 
 0.4054651 -1.5040774 -0.5596158  0.8472979  0.1823216 -0.5596158  0.1823216  0.9808293  0.5596158  1.5040774 
       612        613        614        615        616        617        618        619        620        621 
 0.5596158  0.1823216  0.1823216 -0.5596158 -0.1823216 -0.1823216 -0.1823216 -0.1823216 -0.5596158 -0.1823216 
       622        623        624        625        626        627        628        629        630        631 
 0.4054651 -2.1972246  0.5596158 -0.1823216  0.1823216 -0.9808293 -0.1823216  0.1823216  0.1823216 -0.5596158 
       632        633        634        635        636        637        638        639        640        641 
 0.5596158 -0.9808293  0.5596158 -0.9808293  0.5596158  0.1823216 -0.5596158  0.1823216  0.1823216 -0.1823216 
       642        643        644        645        646        647        648        649        650        651 
 0.5596158  1.5040774 -0.4054651 -0.9808293  0.5596158 -0.5596158 -0.5596158  0.9808293 -0.5596158  1.5040774 
       652        653        654        655        656        657        658        659        660        661 
 0.8472979 -0.8472979  0.1823216  0.9808293  1.3862944 -0.1823216  1.5040774 -1.3862944  0.1823216  0.9808293 
       662        663        664        665        666        667        668        669        670        671 
 0.5596158  1.3862944  0.4054651  0.1823216 -0.8472979  1.5040774  0.1823216  0.1823216 -0.5596158 -0.1823216 
       672        673        674        675        676        677        678        679        680        681 
 0.5596158 14.5660678  2.3025851  0.1823216 -0.1823216  0.9808293 -0.8472979  0.1823216 -0.5596158 -0.1823216 
       682        683        684        685        686        687        688        689        690        691 
 0.1823216  0.4054651  0.4054651  1.5040774  0.5596158 -0.1823216  0.4054651  0.6931472 -0.1823216  0.5596158 
       692        693        694        695        696        697        698        699        700        701 
 0.9808293 -0.1823216 -0.1823216  0.1823216  1.5040774  0.9808293 -0.5596158 -0.5596158  0.5596158  1.3862944 
       702        703        704        705        706        707        708        709        710        711 
-0.5596158  0.1823216  0.9808293 -0.5596158 -0.1823216  0.1823216  0.5596158  0.9808293  0.1823216 -0.5596158 
       712        713        714        715        716        717        718        719        720        721 
 0.5596158 -0.5596158 -0.5596158 -0.1823216  0.4054651 -0.9808293  0.9808293  2.3025851  0.5596158  0.1823216 
       722        723        724        725        726        727        728        729        730        731 
-0.1823216 -0.5596158  1.5040774 -0.1823216 -0.4054651  0.4054651  0.5596158  0.1823216 -0.9808293  0.9808293 
       732        733        734        735        736        737        738        739        740        741 
 0.5596158  0.1823216  0.9808293 -0.1823216  0.5596158  0.1823216 -1.3862944 -0.1823216 -0.5596158  0.1823216 
       742        743        744        745        746        747        748        749        750        751 
 1.5040774  0.5596158  0.4054651  1.5040774 -0.5596158  0.9808293  0.5596158  0.4054651  0.5596158  0.9808293 
       752        753        755        756        757        758        759        760        761        762 
-0.1823216  0.9808293  0.5596158  0.5596158 -0.5596158  0.9808293 -0.5596158  0.1823216 -0.1823216  0.5596158 
       763        764        765        766        767        768        769        770        771        772 
 0.1823216  1.5040774  0.1823216 -0.9808293 -0.5596158  0.9808293  0.4054651 -0.1823216  0.5596158  0.9808293 
       773        774        775        776        777        778        779        780        781        782 
 0.1823216  0.5596158  1.5040774  2.3025851  0.1823216  0.5596158 -0.5596158 -0.5596158  0.5596158  0.1823216 
       783        784        785        786        787        788        789        790        791        792 
-0.1823216 -0.5596158 -0.5596158  0.1823216 -0.9808293  1.3862944  0.1823216 -0.1823216 -0.5596158 -0.1823216 
       793        794        795        796        797        798        799        800        801        802 
 0.5596158  0.9808293  1.5040774  1.5040774  0.5596158  0.1823216  0.1823216  0.8472979 -0.8472979  0.1823216 
       803        804        805        806        807        808        809        810        811        812 
-0.9808293  0.9808293  0.4054651  2.3025851 -1.3862944  0.9808293 -1.5040774 -2.1972246 -0.1823216 -0.5596158 
       813        814        815        816        817        818        819        820        821        822 
-0.5596158 -0.1823216  0.5596158  0.8472979  0.9808293  0.5596158  0.4054651 -0.1823216  0.5596158  1.5040774 
       823        824        825        826        827        828        829        830        831        832 
-0.5596158  0.4054651  0.9808293 14.5660678  0.5596158  1.3862944  0.4054651 -0.1823216 -0.1823216 -0.5596158 
       833        834        835        836        837        838        839        840        841        842 
 0.9808293 -0.5596158  0.1823216  0.1823216 -0.1823216  0.1823216 -0.9808293 -0.5596158 -0.5596158 -0.1823216 
       843        844        845        846        847        848        849        850        851        852 
 0.5596158 -0.1823216  1.5040774  0.5596158 -0.4054651 -0.5596158  0.4054651  0.1823216  0.1823216  0.9808293 
       853        854        855        856        857        858        859        860        861        862 
-0.1823216 -0.1823216  1.3862944 -0.4054651 -0.5596158 -0.5596158  0.5596158 -0.8472979  0.1823216 -0.5596158 
       863        864        865        866        867        868        869        870        871        872 
-0.1823216  0.1823216 -0.8472979  0.1823216 -0.1823216 -0.1823216 -0.1823216 -0.9808293 -0.1823216  1.5040774 
       873        874        875        876        877        878        879        880        881        882 
 0.1823216  1.3862944 -1.3862944  0.5596158  0.1823216  0.1823216  0.1823216  0.9808293  0.5596158  0.1823216 
       883        884        885        886        887        888        889        890        891        892 
-0.1823216  0.5596158 -0.1823216  0.1823216 -0.9808293  0.1823216  1.3862944  0.5596158  1.5040774 -1.3862944 
       893        894        895        896        897        898        899        900        901        902 
 0.5596158  0.5596158 -0.1823216  0.1823216 -0.1823216 -0.4054651  0.1823216  0.1823216 -0.1823216 -0.8472979 
       903        904        905        906        907        908        909        910        911        912 
-0.5596158  0.5596158 -0.5596158 -0.5596158  1.5040774  0.9808293  0.5596158  0.9808293  0.1823216  0.4054651 
       913        914        915        916        917        918        919        920        921        922 
-0.1823216  0.5596158 -0.1823216  0.9808293 -0.1823216 -0.5596158  0.4054651 -0.5596158  0.1823216  0.5596158 
       923        924        925        926        927        928        929        930        931        932 
-0.1823216  0.9808293  0.1823216  0.5596158 -0.1823216  0.9808293  0.5596158  2.3025851  0.9808293 -0.5596158 
       933        934        935        936        937        938        939        940        941        942 
 0.1823216  0.9808293  0.1823216 -0.9808293 -0.9808293  1.5040774  0.9808293 -1.5040774  0.9808293 -0.5596158 
       943        944        945        946        947        948        949        950        951        952 
 0.4054651 -0.9808293  0.1823216  1.3862944  0.5596158 -0.9808293  1.3862944 -0.5596158 -0.5596158 -0.8472979 
       953        954        955        956        957        958        959        960        961        962 
 0.1823216  0.5596158  0.1823216  0.1823216 -0.1823216  0.5596158  1.5040774 -0.5596158 -0.1823216 -0.1823216 
       963        964        965        966        967        968        969        970        971        972 
 1.5040774 -0.5596158 -0.1823216 -2.1972246  0.4054651 -0.9808293  0.4054651 -0.1823216  0.1823216 -0.1823216 
       973        974        975        976        977        978        979        980        981        982 
 0.1823216  1.5040774 -0.5596158  0.5596158 -0.9808293 -0.1823216  0.5596158  0.9808293  0.5596158 -0.4054651 
       983        984        985        986        987        988        989        990        991        992 
 0.1823216  0.1823216  0.1823216  0.4054651 -0.5596158  0.1823216 -0.8472979 -0.1823216  0.1823216 -0.1823216 
       993        994        995        996        997        998        999       1000       1001       1002 
 0.1823216  0.4054651 -0.5596158  1.5040774  0.5596158  0.8472979  0.1823216  0.9808293  0.4054651  0.6931472 
      1003       1004       1005       1006       1007       1008       1009       1010       1011       1012 
-0.5596158 -0.5596158 -0.1823216  0.1823216  2.3025851  0.5596158  0.5596158  0.5596158  0.1823216 -0.5596158 
      1013       1014       1015       1016       1017       1018       1019       1020       1021       1022 
 0.1823216 -0.1823216  0.8472979  1.5040774  0.9808293 14.5660678  1.3862944  0.5596158  0.1823216 -0.1823216 
 [ reached getOption("max.print") -- omitted 604 entries ]
# Beta regression of option-choice proportions on trait, with trait also
# modeling the precision parameter (the part after "|").
# Fix: R's formula parser rejects bare arithmetic on the LHS —
# "(optionChoiceN-.001) ~ ..." raised "invalid model formula in ExtractVars".
# Wrapping the expression in I() makes it a literal computation.
# NOTE(review): subtracting .001 only nudges responses off the boundary at 1;
# betareg needs the response strictly inside (0, 1). Consider the
# Smithson & Verkuilen (2006) transform (y * (n - 1) + 0.5) / n — confirm intent.
m <- betareg(I(optionChoiceN - .001) ~ trait | trait, data = traitsFreqs)

Replication of prior self-anchoring findings: Self-evaluations predicting ingroup evaluations

# Self-evaluations (z-scored) predicting ingroup trait endorsement:
# random slope for selfResp.Z by participant, random intercept by trait.
m <- glmer(
  ingChoiceN ~ selfResp.Z + (selfResp.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
library(broom.mixed)
# Fixed effects as odds ratios with confidence intervals.
tidy(m, conf.int = TRUE, exponentiate = TRUE, effects = "fixed")
# Semi-partial R-squared for the fixed effects.
r2beta(m)
# Same replication model using the composite self-evaluation score (SE.Z).
m <- glmer(
  ingChoiceN ~ SE.Z + (SE.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
tidy(m, conf.int = TRUE, exponentiate = TRUE, effects = "fixed")
r2beta(m)
# Does self-to-ingroup projection differ for novel vs. familiar traits?
# SE.Z x novel interaction, with by-participant slopes for both predictors.
m <- glmer(
  ingChoiceN ~ SE.Z * novel + (SE.Z + novel | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
tidy(m, conf.int = TRUE, exponentiate = TRUE, effects = "fixed")
r2beta(m)
# Self-evaluation effect controlling for standardized trait desirability.
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z + scale(desirability) +
    (SE.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)

# Ingroup endorsement predicted by standardized oSE, with by-participant slope.
m <- glmer(
  as.factor(ingChoiceN) ~ scale(oSE) + (scale(oSE) | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)

# Parallel model with standardized iSE as the predictor.
m <- glmer(
  as.factor(ingChoiceN) ~ scale(iSE) + (scale(iSE) | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)

# SE.Z x novelty interaction with novel coerced to a factor (categorical
# contrast), by-participant slopes for both terms.
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z * as.factor(novel) +
    (SE.Z + as.factor(novel) | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)

# SE.Z x novelty interaction, additionally controlling for standardized trait
# desirability; by-participant random slopes for all three predictors.
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z * as.factor(novel) + scale(desirability) +
    (SE.Z + as.factor(novel) + scale(desirability) | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
# Fix: the model term is "SE.Z", not "SE" — ggpredict requires term names
# exactly as they appear in the model formula (every other ggpredict call in
# this script correctly uses "SE.Z").
ggpredict(m, c("SE.Z", "novel")) %>% plot()
# Moderation by standardized Rosenberg self-esteem (RSE): SE.Z x scale(RSE).
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z * scale(RSE) + (SE.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(RSE) + (SE.Z | subID) +      (1 | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2099.8   2142.9  -1041.9   2083.8     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.1305 -0.9859  0.5201  0.9041  1.3766 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01490  0.1221       
 subID  (Intercept) 0.42374  0.6510       
        SE.Z        0.07242  0.2691   0.61
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                Estimate Std. Error z value Pr(>|z|)  
(Intercept)      0.38812    0.20487   1.895   0.0582 .
SE.Z             0.17205    0.09906   1.737   0.0824 .
scale(RSE)       0.27966    0.20533   1.362   0.1732  
SE.Z:scale(RSE)  0.09111    0.09918   0.919   0.3583  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z  s(RSE)
SE.Z        0.493              
scale(RSE)  0.020  0.014       
SE.Z:s(RSE) 0.014  0.053 0.492 
# Plot predicted probabilities for SE.Z at representative RSE values.
ggpredict(m, c("SE.Z", "RSE")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

# Moderation by standardized self-concept clarity (SCC).
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z * scale(SCC) + (SE.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(SCC) + (SE.Z | subID) +      (1 | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2097.6   2140.7  -1040.8   2081.6     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.1113 -0.9834  0.5061  0.9048  1.3666 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01530  0.1237       
 subID  (Intercept) 0.38775  0.6227       
        SE.Z        0.08051  0.2837   0.75
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                 Estimate Std. Error z value Pr(>|z|)  
(Intercept)     0.3883397  0.1967117   1.974   0.0484 *
SE.Z            0.1716780  0.1026918   1.672   0.0946 .
scale(SCC)      0.3372548  0.1983321   1.700   0.0890 .
SE.Z:scale(SCC) 0.0002394  0.1042905   0.002   0.9982  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z  s(SCC)
SE.Z        0.611              
scale(SCC)  0.027  0.017       
SE.Z:s(SCC) 0.016  0.054 0.606 
# Plot predicted probabilities for SE.Z at representative SCC values.
ggpredict(m, c("SE.Z", "SCC")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

# Moderation by standardized DS score.
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z * scale(DS) + (SE.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(DS) + (SE.Z | subID) + (1 |      trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2099.4   2142.4  -1041.7   2083.4     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.0522 -0.9857  0.5130  0.8996  1.3949 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01487  0.1219       
 subID  (Intercept) 0.40043  0.6328       
        SE.Z        0.07830  0.2798   0.66
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
               Estimate Std. Error z value Pr(>|z|)  
(Intercept)     0.38580    0.19959   1.933   0.0532 .
SE.Z            0.17169    0.10167   1.689   0.0913 .
scale(DS)      -0.30090    0.19883  -1.513   0.1302  
SE.Z:scale(DS) -0.04533    0.10075  -0.450   0.6527  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   sc(DS)
SE.Z         0.540              
scale(DS)   -0.015 -0.010       
SE.Z:sc(DS) -0.009 -0.032  0.542
# Plot predicted probabilities for SE.Z at representative DS values.
ggpredict(m, c("SE.Z", "DS")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

# Moderation by standardized need for closure (NFC).
m <- glmer(
  as.factor(ingChoiceN) ~ SE.Z * scale(NFC) + (SE.Z | subID) + (1 | trait),
  data = fullTest,
  family = binomial,
  nAGQ = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(NFC) + (SE.Z | subID) +      (1 | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2101.4   2144.4  -1042.7   2085.4     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.1259 -0.9855  0.5086  0.9013  1.3777 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01483  0.1218       
 subID  (Intercept) 0.49188  0.7013       
        SE.Z        0.07863  0.2804   0.65
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                Estimate Std. Error z value Pr(>|z|)  
(Intercept)      0.38828    0.21946   1.769   0.0769 .
SE.Z             0.17204    0.10186   1.689   0.0912 .
scale(NFC)      -0.10187    0.21805  -0.467   0.6404  
SE.Z:scale(NFC) -0.04587    0.10017  -0.458   0.6470  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   s(NFC)
SE.Z         0.535              
scale(NFC)  -0.009 -0.006       
SE.Z:s(NFC) -0.006 -0.022  0.540
ggpredict(m, c("SE.Z", "NFC")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ SE.Z*scale(SING.Ind) + ( SE.Z | subID) + ( 1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(SING.Ind) + (SE.Z | subID) +      (1 | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2100.4   2143.5  -1042.2   2084.4     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.1434 -0.9862  0.5333  0.9109  1.4058 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01541  0.1242       
 subID  (Intercept) 0.49920  0.7065       
        SE.Z        0.07072  0.2659   0.68
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                     Estimate Std. Error z value Pr(>|z|)  
(Intercept)           0.38703    0.22099   1.751   0.0799 .
SE.Z                  0.17143    0.09828   1.744   0.0811 .
scale(SING.Ind)       0.04858    0.21944   0.221   0.8248  
SE.Z:scale(SING.Ind)  0.10087    0.09616   1.049   0.2942  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   s(SING
SE.Z         0.554              
scl(SING.I)  0.004 -0.001       
SE.Z:(SING.  0.000  0.002  0.562
ggpredict(m, c("SE.Z", "SING.Ind")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ SE.Z*scale(SING.Inter) + ( SE.Z | subID) + ( 1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(SING.Inter) + (SE.Z | subID) +      (1 | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2099.0   2142.0  -1041.5   2083.0     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.0963 -0.9847  0.5114  0.9046  1.3709 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01497  0.1223       
 subID  (Intercept) 0.38660  0.6218       
        SE.Z        0.07111  0.2667   0.60
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                       Estimate Std. Error z value Pr(>|z|)  
(Intercept)             0.38734    0.19643   1.972   0.0486 *
SE.Z                    0.17113    0.09844   1.738   0.0822 .
scale(SING.Inter)      -0.33416    0.19710  -1.695   0.0900 .
SE.Z:scale(SING.Inter) -0.09474    0.09924  -0.955   0.3398  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   s(SING
SE.Z         0.484              
scl(SING.I) -0.024 -0.014       
SE.Z:(SING. -0.013 -0.049  0.486
ggpredict(m, c("SE.Z", "SING.Inter")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ SE.Z*scale(Proto) + ( SE.Z | subID) + ( 1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(Proto) + (SE.Z | subID) +      (1 | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2097.8   2140.9  -1040.9   2081.8     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.2393 -0.9825  0.4972  0.9025  1.3777 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01527  0.1236       
 subID  (Intercept) 0.38403  0.6197       
        SE.Z        0.07835  0.2799   0.69
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                  Estimate Std. Error z value Pr(>|z|)  
(Intercept)        0.39377    0.19597   2.009   0.0445 *
SE.Z               0.17037    0.10195   1.671   0.0947 .
scale(Proto)      -0.37819    0.20439  -1.850   0.0643 .
SE.Z:scale(Proto) -0.03303    0.11144  -0.296   0.7669  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   scl(P)
SE.Z         0.560              
scale(Prot) -0.042 -0.024       
SE.Z:scl(P) -0.021 -0.086  0.541
ggpredict(m, c("SE.Z", "Proto")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ SE.Z*scale(SI) + ( SE.Z | subID) + ( 1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(SI) + (SE.Z | subID) + (1 |      trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2093.5   2136.5  -1038.7   2077.5     1596 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.5357 -0.9836  0.5497  0.8931  1.7206 

Random effects:
 Groups Name        Variance Std.Dev. Corr
 trait  (Intercept) 0.01110  0.1054       
 subID  (Intercept) 0.50315  0.7093       
        SE.Z        0.04302  0.2074   0.95
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
               Estimate Std. Error z value Pr(>|z|)   
(Intercept)     0.39170    0.22185   1.766  0.07746 . 
SE.Z            0.18399    0.08448   2.178  0.02941 * 
scale(SI)      -0.05844    0.22055  -0.265  0.79103   
SE.Z:scale(SI) -0.21866    0.08254  -2.649  0.00807 **
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   sc(SI)
SE.Z         0.701              
scale(SI)   -0.009 -0.010       
SE.Z:sc(SI) -0.011 -0.041  0.707
ggpredict(m, c("SE.Z", "SI")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ SE.Z*scale(NTB) + ( SE.Z | subID) + ( SE.Z | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ SE.Z * scale(NTB) + (SE.Z | subID) +      (SE.Z | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2105.3   2159.1  -1042.6   2085.3     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.1207 -0.9849  0.5092  0.8972  1.3783 

Random effects:
 Groups Name        Variance  Std.Dev. Corr 
 trait  (Intercept) 1.487e-02 0.121954      
        SE.Z        2.509e-05 0.005009 -1.00
 subID  (Intercept) 4.902e-01 0.700153      
        SE.Z        8.022e-02 0.283237 0.66 
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                  Estimate Std. Error z value Pr(>|z|)  
(Intercept)      3.884e-01  2.191e-01   1.772   0.0763 .
SE.Z             1.717e-01  1.026e-01   1.672   0.0945 .
scale(NTB)      -1.108e-01  2.181e-01  -0.508   0.6115  
SE.Z:scale(NTB)  8.541e-05  1.016e-01   0.001   0.9993  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) SE.Z   s(NTB)
SE.Z         0.545              
scale(NTB)  -0.010 -0.006       
SE.Z:s(NTB) -0.006 -0.024  0.551
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("SE.Z", "NTB")) %>% plot()
Data were 'prettified'. Consider using `terms="SE.Z [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(RSE) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(RSE) + (scale(desirability) |  
    subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2111.4   2165.2  -1045.7   2091.4     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.8959 -1.0144  0.4983  0.9101  1.4069 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.003365 0.05801      
        scale(desirability) 0.013385 0.11570  1.00
 subID  (Intercept)         0.415188 0.64435      
        scale(desirability) 0.001739 0.04170  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                               Estimate Std. Error z value Pr(>|z|)    
(Intercept)                     0.38520    0.20272   1.900  0.05741 .  
scale(desirability)             0.20932    0.06083   3.441  0.00058 ***
scale(RSE)                      0.27336    0.20314   1.346  0.17840    
scale(desirability):scale(RSE)  0.02205    0.05729   0.385  0.70036    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) s(RSE)
scl(dsrblt) 0.218               
scale(RSE)  0.019  0.019        
scl():(RSE) 0.018  0.128  0.233 
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "RSE")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(SCC) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(SCC) + (scale(desirability) |  
    subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2110.0   2163.8  -1045.0   2090.0     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.8913 -1.0078  0.4896  0.9083  1.3630 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.003354 0.05791      
        scale(desirability) 0.013479 0.11610  1.00
 subID  (Intercept)         0.377675 0.61455      
        scale(desirability) 0.003107 0.05574  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                                Estimate Std. Error z value Pr(>|z|)    
(Intercept)                     0.385673   0.194130   1.987 0.046958 *  
scale(desirability)             0.209174   0.061907   3.379 0.000728 ***
scale(SCC)                      0.337654   0.195663   1.726 0.084403 .  
scale(desirability):scale(SCC) -0.006718   0.060638  -0.111 0.911783    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) s(SCC)
scl(dsrblt) 0.280               
scale(SCC)  0.026  0.024        
scl():(SCC) 0.022  0.143  0.293 
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "SCC")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(DS) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(DS) + (scale(desirability) |  
    subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2110.9   2164.7  -1045.4   2090.9     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.7639 -1.0088  0.4900  0.9098  1.4151 

Random effects:
 Groups Name                Variance  Std.Dev. Corr
 trait  (Intercept)         0.0034089 0.05839      
        scale(desirability) 0.0134907 0.11615  1.00
 subID  (Intercept)         0.3893895 0.62401      
        scale(desirability) 0.0009413 0.03068  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                              Estimate Std. Error z value Pr(>|z|)    
(Intercept)                    0.38260    0.19680   1.944 0.051891 .  
scale(desirability)            0.20787    0.06023   3.451 0.000558 ***
scale(DS)                     -0.29740    0.19610  -1.517 0.129382    
scale(desirability):scale(DS) -0.03279    0.05445  -0.602 0.547033    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) sc(DS)
scl(dsrblt)  0.167              
scale(DS)   -0.015 -0.017       
scl(d):(DS) -0.015 -0.103  0.179
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "DS")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(NFC) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(NFC) + (scale(desirability) |  
    subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2112.6   2166.4  -1046.3   2092.6     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.8577 -1.0118  0.4889  0.9094  1.3512 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.003355 0.05792      
        scale(desirability) 0.013450 0.11598  1.00
 subID  (Intercept)         0.479426 0.69241      
        scale(desirability) 0.002268 0.04762  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                               Estimate Std. Error z value Pr(>|z|)    
(Intercept)                     0.38517    0.21664   1.778 0.075415 .  
scale(desirability)             0.20871    0.06126   3.407 0.000657 ***
scale(NFC)                     -0.09903    0.21536  -0.460 0.645636    
scale(desirability):scale(NFC)  0.01942    0.05410   0.359 0.719619    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) s(NFC)
scl(dsrblt)  0.244              
scale(NFC)  -0.008 -0.007       
scl():(NFC) -0.008 -0.060  0.264
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "NFC")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(SING.Ind) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(SING.Ind) +  
    (scale(desirability) | subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2112.7   2166.5  -1046.4   2092.7     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.8602 -1.0147  0.4910  0.9059  1.3673 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.003337 0.05777      
        scale(desirability) 0.013431 0.11589  1.00
 subID  (Intercept)         0.486231 0.69730      
        scale(desirability) 0.002218 0.04710  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                                    Estimate Std. Error z value Pr(>|z|)    
(Intercept)                          0.38508    0.21806   1.766 0.077406 .  
scale(desirability)                  0.20921    0.06118   3.419 0.000628 ***
scale(SING.Ind)                      0.05346    0.21663   0.247 0.805083    
scale(desirability):scale(SING.Ind) -0.02489    0.05367  -0.464 0.642820    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) s(SING
scl(dsrblt) 0.242               
scl(SING.I) 0.004  0.003        
s():(SING.I 0.003  0.022  0.263 
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "SING.Ind")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(SING.Inter) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(SING.Inter) +  
    (scale(desirability) | subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2110.4   2164.2  -1045.2   2090.4     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.8256 -1.0104  0.4934  0.9125  1.4150 

Random effects:
 Groups Name                Variance  Std.Dev. Corr
 trait  (Intercept)         0.0000000 0.00000      
        scale(desirability) 0.0093185 0.09653   NaN
 subID  (Intercept)         0.3727206 0.61051      
        scale(desirability) 0.0009385 0.03063  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                                      Estimate Std. Error z value Pr(>|z|)    
(Intercept)                            0.38345    0.19280   1.989 0.046716 *  
scale(desirability)                    0.20642    0.05956   3.466 0.000529 ***
scale(SING.Inter)                     -0.33236    0.19353  -1.717 0.085910 .  
scale(desirability):scale(SING.Inter) -0.03516    0.05701  -0.617 0.537466    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) s(SING
scl(dsrblt)  0.162              
scl(SING.I) -0.022 -0.021       
s():(SING.I -0.020 -0.138  0.177
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "SING.Inter")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(Proto) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(Proto) +  
    (scale(desirability) | subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2109.2   2163.0  -1044.6   2089.2     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.3834 -1.0152  0.5623  0.9062  1.4029 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.003279 0.05726      
        scale(desirability) 0.013172 0.11477  1.00
 subID  (Intercept)         0.374479 0.61195      
        scale(desirability) 0.001273 0.03567  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                                 Estimate Std. Error z value Pr(>|z|)    
(Intercept)                       0.39167    0.19357   2.023 0.043030 *  
scale(desirability)               0.21533    0.06089   3.536 0.000406 ***
scale(Proto)                     -0.38362    0.20188  -1.900 0.057402 .  
scale(desirability):scale(Proto) -0.07237    0.07165  -1.010 0.312464    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) scl(P)
scl(dsrblt)  0.194              
scale(Prot) -0.043 -0.047       
scl(ds):(P) -0.038 -0.225  0.229
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "Proto")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(SI) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(SI) + (scale(desirability) |  
    subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2110.7   2164.5  -1045.4   2090.7     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-4.0641 -1.0169  0.5313  0.9074  1.5565 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.003313 0.05756      
        scale(desirability) 0.013506 0.11621  1.00
 subID  (Intercept)         0.492358 0.70168      
        scale(desirability) 0.002593 0.05092  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                              Estimate Std. Error z value Pr(>|z|)    
(Intercept)                    0.38629    0.21938   1.761 0.078272 .  
scale(desirability)            0.21359    0.06156   3.470 0.000521 ***
scale(SI)                     -0.05538    0.21816  -0.254 0.799632    
scale(desirability):scale(SI) -0.08211    0.05464  -1.503 0.132865    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) sc(SI)
scl(dsrblt)  0.260              
scale(SI)   -0.008 -0.009       
scl(d):(SI) -0.009 -0.079  0.280
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "SI")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

m <- glmer( as.factor(ingChoiceN) ~ scale(desirability)*scale(NTB) + ( scale(desirability) | subID) + ( scale(desirability) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
boundary (singular) fit: see help('isSingular')
summary(m)
Generalized linear mixed model fit by maximum likelihood (Laplace Approximation) ['glmerMod']
 Family: binomial  ( logit )
Formula: as.factor(ingChoiceN) ~ scale(desirability) * scale(NTB) + (scale(desirability) |  
    subID) + (scale(desirability) | trait)
   Data: fullTest
Control: glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 1e+05))

     AIC      BIC   logLik deviance df.resid 
  2112.8   2166.6  -1046.4   2092.8     1594 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-3.9039 -1.0131  0.5005  0.9074  1.4211 

Random effects:
 Groups Name                Variance Std.Dev. Corr
 trait  (Intercept)         0.00337  0.05805      
        scale(desirability) 0.01338  0.11569  1.00
 subID  (Intercept)         0.47789  0.69130      
        scale(desirability) 0.00211  0.04593  1.00
Number of obs: 1604, groups:  trait, 148; subID, 11

Fixed effects:
                               Estimate Std. Error z value Pr(>|z|)    
(Intercept)                     0.38539    0.21633   1.782  0.07483 .  
scale(desirability)             0.20947    0.06112   3.427  0.00061 ***
scale(NTB)                     -0.11353    0.21542  -0.527  0.59819    
scale(desirability):scale(NTB) -0.01384    0.05480  -0.253  0.80060    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) scl(d) s(NTB)
scl(dsrblt)  0.237              
scale(NTB)  -0.010 -0.010       
scl():(NTB) -0.009 -0.075  0.255
optimizer (bobyqa) convergence code: 0 (OK)
boundary (singular) fit: see help('isSingular')
ggpredict(m, c("desirability", "NTB")) %>% plot()
Data were 'prettified'. Consider using `terms="desirability [all]"` to get smooth plots.

# Three-way model: self-esteem (z) x trait novelty x RSE predicting ingroup choice.
# NOTE(review): the trait random term uses SE.Z*as.factor(novel) here, while every
# sibling model below uses SE.Z+as.factor(novel) — confirm the random interaction
# slope is intentional and not a typo.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(RSE) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z*as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "RSE")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x SCC predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SCC) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "SCC")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x DS predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(DS) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "DS")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x NFC predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(NFC) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "NFC")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x SING.Ind predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SING.Ind) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "SING.Ind")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x SING.Inter predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SING.Inter) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "SING.Inter")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x Proto predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(Proto) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
# Also restored "novel" so the plot shows the full three-way pattern like the
# sibling models in this section.
ggpredict(m, c("SE.Z", "novel", "Proto")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x SI predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SI) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "SI")) %>% plot()
# Three-way model: self-esteem (z) x trait novelty x NTB predicting ingroup choice.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(NTB) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# Fix: the model term is SE.Z, not SE — ggpredict() errors on terms not in the model.
ggpredict(m, c("SE.Z", "novel", "NTB")) %>% plot()
---
title: "R Notebook"
output: html_notebook
---

```{r}
# Mixed-effects models with p-values (also loads lme4 and Matrix).
library(lmerTest)
# Marginal-effects predictions (ggpredict) for plotting model fits.
library(ggeffects)
# Data manipulation; supplies the %>% pipe used throughout.
library(dplyr)
# Automated text reports of model results.
library(report)
# Semi-partial R^2 (r2beta) for mixed models.
library(r2glmm)
```

```{r}
# Trial-level test data produced by the cleaning pipeline.
fullTest <- read.csv("../Cleaning/output/fullTest.csv")
# Binary choice outcome and novelty condition as factors.
fullTest$ingChoiceN <- as.factor(fullTest$ingChoiceN)
fullTest$novel <- as.factor(fullTest$novel)
# Z-score the self-evaluation / self-esteem predictors once up front.
fullTest$selfResp.Z <- scale(fullTest$selfResp)
fullTest$SE.Z <- scale(fullTest$SE)
fullTest$iSE.Z <- scale(fullTest$iSE)
fullTest$oSE.Z <- scale(fullTest$oSE)

# Per-trait over/under selection frequencies from the cleaning pipeline.
traitsFreqs <- read.csv("../Cleaning/output/traitFreqOverUnder.csv")

# Unique participant IDs.
uSubs <- unique(fullTest$subID)

# Model diagnostics (check_overdispersion, check_model).
# NOTE(review): consider moving this library() call to the setup chunk above.
library(performance)
```

```{r}
# Trait fixed effect with a random subject intercept.
# NOTE(review): this glmer fit is immediately overwritten by the glm below and its
# summary is never inspected — confirm whether it is still needed.
m <- glmer( ingChoiceN ~ trait + ( 1 | subID) , data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)

# Sum-to-zero (deviation) coding so each of the 148 traits is contrasted
# against the grand mean rather than a reference trait.
# NOTE(review): the contrasts are set only AFTER the glmer fit above — confirm ordering.
fullTest$trait <- as.factor(fullTest$trait)
contrasts(fullTest$trait) <- contr.sum(148)
# NOTE(review): outcome is optionChoiceN here, unlike ingChoiceN everywhere else in
# this file — confirm that column exists in fullTest and is the intended DV.
m <- glm(optionChoiceN ~ trait, family = binomial,
          data = fullTest
         )
summary(m)
```


```{r}
# Trait-frequency models: compare binomial, quasibinomial, and poisson fits and
# check each for overdispersion / diagnostic problems.
traitsFreqs$trait <- as.factor(traitsFreqs$trait)
# Sum-to-zero coding: each trait contrasted against the grand mean.
contrasts(traitsFreqs$trait) <- contr.sum(148)

# Intercept-only binomial baseline.
m <- glm(optionChoiceN ~ 1, family = binomial,
          data = traitsFreqs
         )
summary(m)
check_overdispersion(m)
check_model(m)

# Quasibinomial alternative: frees the dispersion parameter.
m <- glm(optionChoiceN ~ trait, family = quasibinomial,
          data = traitsFreqs
         )
check_overdispersion(m)
check_model(m)

# Removed a bare `t.test()` call that was left here: with no arguments it always
# errors ("argument 'x' is missing"), aborting the chunk before the poisson fit.

# Poisson alternative (treats the outcome as a count).
m <- glm(optionChoiceN ~ trait, family = poisson,
          data = traitsFreqs
         )
check_overdispersion(m)
check_model(m)
```


# Replication of prior self-anchoring findings: Self-evaluations predicting ingroup evaluations

```{r}
# Replication: does standardized self-evaluation (selfResp.Z) predict
# ingroup trait endorsement? Random slope by subject, random intercept
# by trait.
m <- glmer(
  ingChoiceN ~ selfResp.Z + (selfResp.Z | subID) + (1 | trait),
  data    = fullTest,
  family  = binomial,
  nAGQ    = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)

library(broom.mixed)
# Fixed effects as odds ratios with CIs, plus semi-partial R-squared.
tidy(m, conf.int = TRUE, exponentiate = TRUE, effects = "fixed")
r2beta(m)
```

```{r}
# Same replication model using the composite standardized score SE.Z.
m <- glmer(
  ingChoiceN ~ SE.Z + (SE.Z | subID) + (1 | trait),
  data    = fullTest,
  family  = binomial,
  nAGQ    = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
# Odds-ratio table and semi-partial R-squared for the fixed effect.
tidy(m, conf.int = TRUE, exponentiate = TRUE, effects = "fixed")
r2beta(m)
```

```{r}
# Self-evaluation x trait-novelty interaction; correlated random slopes
# for both terms within subject, random intercept per trait.
m <- glmer(
  ingChoiceN ~ SE.Z * novel + (SE.Z + novel | subID) + (1 | trait),
  data    = fullTest,
  family  = binomial,
  nAGQ    = 1,
  control = glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))
)
summary(m)
# Odds ratios with CIs and semi-partial R-squared.
tidy(m, conf.int = TRUE, exponentiate = TRUE, effects = "fixed")
r2beta(m)
```


```{r}
# Self-evaluation predicting ingroup choice, controlling for trait desirability.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z + scale(desirability) + ( SE.Z | subID) + ( 1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)

# oSE predictor alone (standardized in-formula).
m <- glmer( as.factor(ingChoiceN) ~ scale(oSE) + ( scale(oSE) | subID) + (1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)

# iSE predictor alone (standardized in-formula).
m <- glmer( as.factor(ingChoiceN) ~ scale(iSE) + ( scale(iSE) | subID) + (1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)

# SE x trait-novelty interaction.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel) + ( SE.Z + as.factor(novel) | subID) + (  1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)

# SE x novelty with desirability covariate (and its random slope by subject).
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel) + scale(desirability) + ( SE.Z + as.factor(novel) + scale(desirability) | subID) + (  1 | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE -- ggpredict() errors on terms
# absent from the model.
ggpredict(m, c("SE.Z", "novel")) %>% plot()
```

```{r}
# Moderator sweep: does each individual-difference scale moderate the
# SE.Z -> ingroup-choice association? All fits share the same optimizer
# settings, so the control object is built once up front.
bobyqa_ctrl <- glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))

# RSE moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(RSE) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "RSE")) %>% plot()

# SCC moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(SCC) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "SCC")) %>% plot()

# DS moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(DS) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "DS")) %>% plot()

# NFC moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(NFC) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "NFC")) %>% plot()

# SING.Ind moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(SING.Ind) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "SING.Ind")) %>% plot()

# SING.Inter moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(SING.Inter) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "SING.Inter")) %>% plot()

# Proto moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(Proto) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "Proto")) %>% plot()

# SI moderator.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(SI) + (SE.Z | subID) + (1 | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "SI")) %>% plot()

# NTB moderator; note the additional SE.Z random slope by trait.
m <- glmer(as.factor(ingChoiceN) ~ SE.Z * scale(NTB) + (SE.Z | subID) + (SE.Z | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("SE.Z", "NTB")) %>% plot()
```

```{r}
# Parallel moderator sweep with trait desirability as the focal predictor;
# random desirability slopes by both subject and trait throughout. Shared
# optimizer settings built once.
bobyqa_ctrl <- glmerControl(optimizer = "bobyqa", optCtrl = list(maxfun = 100000))

# RSE moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(RSE) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "RSE")) %>% plot()

# SCC moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(SCC) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "SCC")) %>% plot()

# DS moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(DS) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "DS")) %>% plot()

# NFC moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(NFC) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "NFC")) %>% plot()

# SING.Ind moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(SING.Ind) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "SING.Ind")) %>% plot()

# SING.Inter moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(SING.Inter) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "SING.Inter")) %>% plot()

# Proto moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(Proto) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "Proto")) %>% plot()

# SI moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(SI) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "SI")) %>% plot()

# NTB moderator.
m <- glmer(as.factor(ingChoiceN) ~ scale(desirability) * scale(NTB) +
             (scale(desirability) | subID) + (scale(desirability) | trait),
           data = fullTest, family = binomial, control = bobyqa_ctrl, nAGQ = 1)
summary(m)
ggpredict(m, c("desirability", "NTB")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x RSE. Note the random-effect
# structures differ by grouping: additive slopes by subject, full
# interaction slopes by trait.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(RSE) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z*as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE -- ggpredict() errors on terms
# absent from the model.
ggpredict(m, c("SE.Z", "novel", "RSE")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x SCC.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SCC) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "SCC")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x DS.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(DS) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "DS")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x NFC.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(NFC) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "NFC")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x SING.Ind.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SING.Ind) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "SING.Ind")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x SING.Inter.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SING.Inter) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "SING.Inter")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x Proto.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(Proto) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): use the model term SE.Z (not SE) and include "novel" so the
# plot shows the fitted three-way interaction like the sibling chunks.
ggpredict(m, c("SE.Z", "novel", "Proto")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x SI.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(SI) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "SI")) %>% plot()
```

```{r}
# Three-way moderation: SE x novelty x NTB.
m <- glmer( as.factor(ingChoiceN) ~ SE.Z*as.factor(novel)*scale(NTB) + ( SE.Z+as.factor(novel) | subID) + ( SE.Z+as.factor(novel) | trait), data = fullTest, family = binomial, control = glmerControl(optimizer = "bobyqa",
                                    optCtrl = list(maxfun = 100000)),
    nAGQ = 1)
summary(m)
# FIX(review): the model term is SE.Z, not SE.
ggpredict(m, c("SE.Z", "novel", "NTB")) %>% plot()
```


